Approach for Fast Interpolation of Values within FVCOM Mesh
Published
October 18, 2024
Code
# Packages ----
{
  library(raster)        # netcdf data as raster
  library(sf)            # vector spatial mapping/operations
  library(fvcom)         # fvcom mesh and variable extractions
  library(ncdf4)         # netcdf support
  library(tidyverse)     # data wrangling and plotting
  library(gmRi)          # color schemes and cloud storage paths
  library(patchwork)     # plot arrangement
  library(rnaturalearth) # coastlines and state polygons
  library(geometry)      # barycentric coordinates
  library(Matrix)        # matrix algebra
  library(sysfonts)      # font support
}

# Paths + conflicts ----
conflicted::conflict_prefer("select", "dplyr")
conflicted::conflict_prefer("filter", "dplyr")
proj_path <- cs_path("mills", "Projects/Lobster ECOL")
# fvcom_out <- str_c(proj_path, "FVCOM_support/")
source(here::here("R/FVCOM_Support.R"))
theme_set(theme_bw() + map_theme())

# Shapefiles ----
new_england <- ne_states("united states of america", returnclass = "sf") %>%
  filter(postal %in% c("VT", "ME", "RI", "MA", "CT", "NH", "NY",
                       "MD", "VA", "NJ", "DE", "NC", "PA", "WV"))
canada <- ne_states("canada", returnclass = "sf")

# Gom EPU to clip
res_shapes <- cs_path("res", "Shapefiles")
epu_path   <- str_c(res_shapes, "EPU/")
gom_poly   <- read_sf(str_c(epu_path, "individual_epus/GOM.geojson"))
Code
# Use GMRI style
use_gmri_style_rmd()
Point Interpolation of Temperature for Survey Stations
To acquire modeled bottom temperature values from the survey programs we need to interpolate values from the FVCOM hindcast data for that point in time using information from the nearest mesh nodes.
A way to do this is using linear interpolation or some other method. The following code will step through performing these steps for the federal bottom trawl survey. We can then compare how the FVCOM hindcast performs relative to the CTD data from the survey.
Load Trawl Data
Code
# Load the trawl data
trawl_path <- cs_path("res", "nmfs_trawl/SURVDAT_current")
trawl_raw  <- read_rds(str_c(trawl_path, "survdat_lw.rds"))

# Tidy it a little
trawl_dat <- gmRi::gmri_survdat_prep(
  survdat        = trawl_raw$survdat,
  survdat_source = "most recent",
  box_location   = "cloudstorage")

# Get distinct time/date/tow/locations
trawl_locations <- trawl_dat %>%
  distinct(cruise6, station, stratum, tow, svvessel, est_towdate, season,
           decdeg_beglat, decdeg_beglon, bottemp_ctd = bottemp)
Load FVCOM Inventory
Code
# Surface and Bottom only, from Dr. Li
fvcom_path <- cs_path("res", "FVCOM/Lobster-ECOL")

# Here are the files we have, loop over them later
fvcom_surfbot_files <- setNames(
  list.files(fvcom_path, full.names = TRUE, pattern = ".nc"),
  str_remove(list.files(fvcom_path, full.names = FALSE, pattern = ".nc"), ".nc"))

# Test File: GOM3 1978
# Load some daily FVCOM that we downloaded and averaged
fvcom_yrx <- nc_open(fvcom_surfbot_files["gom3_1978"])

# Get the mesh itself as a simple feature collection
gom3_mesh <- get_mesh_geometry(fvcom_yrx, what = "lonlat")
Coverage Overlap
The following map shows the coverage overlap between trawl survey locations and FVCOM GOM3.
There are a number of projects where biological samples are taken, but environmental conditions are not always recorded at the same time.
This adds the additional step of needing to index to the correct point in time and pull the correct node values associated with it.
The following code approaches identifying the correct date indices to loop/map through this interpolation process.
Get the Proper Time Index for Each File
Code
# Get the dates within each file
fvcom_dates <- map_dfr(
  fvcom_surfbot_files,
  function(x){
    x_fvcom <- nc_open(x)
    timez <- ncvar_get(x_fvcom, "Times")
    nc_close(x_fvcom)
    return(
      data.frame("fvcom_date" = timez) %>%
        mutate(time_idx = row_number()))
  },
  .id = "fv_file")

# Join them by date to look at their matches
trawl_dates_matched <- trawl_locations %>%
  mutate(tow_date = as.Date(est_towdate)) %>%
  left_join(
    mutate(fvcom_dates, fvcom_date = as.Date(fvcom_date)),
    join_by(tow_date == fvcom_date))
Get Proper Element and Node Weights for Points
Code
#' Add Linear Interpolation Weights Based on Node Coordinates
#'
#' @description For each point, identify the FVCOM mesh triangle (element)
#' containing it, then solve for the barycentric/linear-interpolation weights
#' of the three surrounding nodes (p1, p2, p3).
#'
#' @param pts_sf sf point dataframe of locations to interpolate at
#' @param fvcom_mesh sf polygon (triangle) mesh with elem, p1, p2, p3 columns
#'
#' @return `pts_sf` rows that fall within the mesh, with added columns
#'   p1_wt, p2_wt, p3_wt (rows outside the mesh are dropped).
triangulation_linear_weighting <- function(pts_sf, fvcom_mesh){

  # Identify the triangles that overlap each point:
  # st_join assigns the elem, p1, p2, p3 IDs to the points.
  # BUG FIX: this previously joined against the global `gom3_mesh`,
  # silently ignoring the `fvcom_mesh` argument.
  pts_assigned <- st_join(
      st_transform(pts_sf, st_crs(fvcom_mesh)),
      fvcom_mesh,
      join = st_within) %>%
    drop_na(elem)

  # Iterate over the rows to add weights:
  pts_weighted <- pts_assigned %>%
    base::split(., seq_len(nrow(.))) %>%
    purrr::map_dfr(function(pt_assigned){

      # Subset the relevant triangle from the st_join info
      triangle_match <- fvcom_mesh[pt_assigned$elem, ]

      # Matrix for the triangle, from its node coordinates:
      # 3x3 matrix: row 1 x coords, row 2 y coords, row 3 rep(1, 3)
      node_vertices <- t(st_coordinates(triangle_match[1, ])[1:3, 1:3])

      # Matrix for the point: 3x1 matrix of x, y, 1
      point_coords <- matrix(c(st_coordinates(pt_assigned[1, ]), 1), nrow = 3)

      #### For Linear Interpolation:
      # Invert the triangle matrix, then solve for the weights
      inverse_coordmat <- solve(node_vertices)
      node_wts <- inverse_coordmat %*% point_coords %>%
        t() %>%
        as.data.frame() %>%
        setNames(c("p1_wt", "p2_wt", "p3_wt"))

      # Return point with its weights appended
      bind_cols(pt_assigned, node_wts)
    }) # End rowwise

  return(pts_weighted)
}
Code
# Make the trawl locations an sf dataframe
trawl_pts_sf <- trawl_dates_matched %>%
  st_as_sf(coords = c("decdeg_beglon", "decdeg_beglat"),
           crs = 4326, remove = FALSE)

# Run for all points:
trawl_pts_weighted <- triangulation_linear_weighting(
    pts_sf     = trawl_pts_sf,
    fvcom_mesh = gom3_mesh) %>%
  st_drop_geometry()
Apply Interpolations at Proper Time Step
Datewise Interpolation
This step can be looped on each date/year to minimize the amount of FVCOM file opening/closing. Within each year we need to identify which timestep to extract data at, and then iterate over those timesteps.
Code
#' Interpolate Values from FVCOM Mesh at Timestep
#'
#' @description Takes a dataframe row containing node indices, a time index,
#' and linear-interpolation weights, and returns the same row with an
#' interpolated value appended for the requested variable at that timestep.
#'
#' `dated_points_weighted` should contain the following columns:
#' time_idx = integer timestep to use for interpolation
#' p1 = integer index value for node 1 surrounding the interpolation location
#' p2 = integer index value for node 2 surrounding the interpolation location
#' p3 = integer index value for node 3 surrounding the interpolation location
#' p1_wt = linear interpolation weight for p1
#' p2_wt = linear interpolation weight for p2
#' p3_wt = linear interpolation weight for p3
#'
#' @param dated_points_weighted dataframe row containing time index, node index,
#'   and node weight information columns
#' @param fvcom_nc FVCOM netcdf file to extract values from
#' @param fvcom_varid String indicating variable in FVCOM netcdf to interpolate
#'   values with
#' @param var_out String to use as variable name in returned dataframe
#'
#' @return The input dataframe with an added column (named by `var_out`) of
#'   interpolated values.
#' @export
interpolate_at_timestep <- function(dated_points_weighted, fvcom_nc,
                                    fvcom_varid, var_out){

  # Get the values of the variable of interest at this timestep,
  # across all nodes, as a vector
  node_vals <- ncvar_get(
    nc    = fvcom_nc,
    varid = fvcom_varid,
    start = c(1, dated_points_weighted[["time_idx"]]),
    count = c(-1, 1))

  # Interpolate: weighted sum of the three surrounding node values.
  # `{{var_out}} :=` names the new column from the string in var_out.
  dated_interpolation <- dated_points_weighted %>%
    mutate(
      {{var_out}} := node_vals[p1] * p1_wt +
                     node_vals[p2] * p2_wt +
                     node_vals[p3] * p3_wt)

  return(dated_interpolation)
}
Iterate on Years to Interpolate All Points
Now that we can pull/interpolate for a specific timestep, we can loop over the yearly files and obtain surface and bottom temperatures.
Code
# Operate over years: one netcdf open per year, interpolate all of that
# year's sample points, then combine.
trawl_fvcom_temps <- trawl_pts_weighted %>%
  # filter(year(est_towdate) == 2000) %>%
  drop_na(time_idx) %>%
  mutate(year = year(est_towdate)) %>%
  split(.$year) %>%
  map_dfr(.f = function(samples_year_x){

    # Get the file to open
    nc_name <- samples_year_x[["fv_file"]][[1]][[1]]

    # Open the corresponding netcdf.
    # FIX: close the handle when done (was previously leaked).
    fvcom_yr_x <- nc_open(fvcom_surfbot_files[[nc_name]])
    on.exit(nc_close(fvcom_yr_x), add = TRUE)

    # Split the samples by row/date
    locations_bydate <- samples_year_x %>%
      base::split(., seq_len(nrow(.)))

    # Iterate on those - do surface temp then bottom temp
    dates_interpolated <- locations_bydate %>%
      map(.f = ~ interpolate_at_timestep(
        dated_points_weighted = .x,
        fvcom_nc    = fvcom_yr_x,
        fvcom_varid = "surface_t",
        var_out     = "surface_temp")) %>%
      map_dfr(.f = ~ interpolate_at_timestep(
        dated_points_weighted = .x,
        fvcom_nc    = fvcom_yr_x,
        fvcom_varid = "bottom_t",
        var_out     = "bottom_temp"))

    return(dates_interpolated)
  })
# Take the VTS Survey Location and Time information:
# Join them by their date to match them to FVCOM file names and time index
vts_dates_matched <- vts_trips %>%
  mutate(Date = as.Date(Date)) %>%
  left_join(
    mutate(fvcom_dates, fvcom_date = as.Date(fvcom_date)),
    join_by(Date == fvcom_date))

# Next, overlay the points to get the node ids.
# Also applies the weights for linear interpolation.
# Run for all points:
vts_pts_weighted <- triangulation_linear_weighting(
    pts_sf     = vts_dates_matched,
    fvcom_mesh = gom3_mesh) %>%
  st_drop_geometry()

# Map over the yearly files
vts_fvcom_temps <- vts_pts_weighted %>%
  drop_na(time_idx) %>%
  mutate(year = year(Date)) %>%
  split(.$year) %>%
  map_dfr(.f = function(samples_year_x){

    # Get the file to open
    nc_name <- samples_year_x[["fv_file"]][[1]][[1]]

    # Open the corresponding netcdf.
    # FIX: close the handle when done (was previously leaked).
    fvcom_yr_x <- nc_open(fvcom_surfbot_files[[nc_name]])
    on.exit(nc_close(fvcom_yr_x), add = TRUE)

    # Split by row to iterate on each point
    locations_bydate <- samples_year_x %>%
      base::split(., seq_len(nrow(.)))

    # Iterate on those - do surface temp then bottom temp
    dates_interpolated <- locations_bydate %>%
      map(.f = ~ interpolate_at_timestep(
        dated_points_weighted = .x,
        fvcom_nc    = fvcom_yr_x,
        fvcom_varid = "surface_t",
        var_out     = "surface_temp")) %>%
      map_dfr(.f = ~ interpolate_at_timestep(
        dated_points_weighted = .x,
        fvcom_nc    = fvcom_yr_x,
        fvcom_varid = "bottom_t",
        var_out     = "bottom_temp"))

    return(dates_interpolated)
  })
---title: "Survey Program FVCOM Temperature Matching"description: | Approach for Fast Interpolation of Values within FVCOM Meshdate: "Updated on: `r Sys.Date()`"format: html: code-fold: true code-tools: true df-print: kable self-contained: trueexecute: echo: true warning: false message: false fig.align: "center" comment: ""---```{r}# Packages{library(raster) # netcdf data as rasterlibrary(sf) # vector spatial mapping/operationslibrary(fvcom) # fvcom mesh and variable extractionslibrary(ncdf4) # netcdf supportlibrary(tidyverse) # data wrangling and plottinglibrary(gmRi) # color schemes and cloud storage pathslibrary(patchwork) # plot arrangementlibrary(rnaturalearth) # coastlines and state polygonslibrary(geometry) # bathycentric coordinateslibrary(Matrix) # matrix algebralibrary(sysfonts) # font support}# Paths + conflictsconflicted::conflict_prefer("select", "dplyr")conflicted::conflict_prefer("filter", "dplyr")proj_path <-cs_path("mills", "Projects/Lobster ECOL")# fvcom_out <- str_c(proj_path, "FVCOM_support/")source(here::here("R/FVCOM_Support.R"))theme_set(theme_bw() +map_theme())# Shapefilesnew_england <-ne_states("united states of america", returnclass ="sf") %>%filter(postal %in%c("VT", "ME", "RI", "MA", "CT", "NH", "NY", "MD", "VA", "NJ", "DE", "NC", "PA", "WV"))canada <-ne_states("canada", returnclass ="sf")# Gom EPU to clipres_shapes <-cs_path("res", "Shapefiles")epu_path <-str_c(res_shapes, "EPU/")gom_poly <-read_sf(str_c(epu_path, "individual_epus/GOM.geojson"))``````{r}#| label: style-sheet#| results: asis# Use GMRI styleuse_gmri_style_rmd()``````{r}#| label: fonts-config#| echo: false# Path to the directory containing the font file (replace with your actual path)font_dir <-paste0(system.file("stylesheets", package ="gmRi"), "/GMRI_fonts/Avenir/")# Register the fontfont_add(family ="Avenir",file.path(font_dir, "LTe50342.ttf"),bold =file.path(font_dir, "LTe50340.ttf"),italic =file.path(font_dir, "LTe50343.ttf"),bolditalic =file.path(font_dir, 
"LTe50347.ttf"))# Load the fontshowtext::showtext_auto()```# Point Interpolation of Temperature for Survey StationsTo acquire modeled bottom temperature values from the survey programs we need to interpolate values from the FVCOM hindcast data for that point in time using information from the nearest mesh nodes.A way to do this is using linear interpolation or some other method. The following code will step through performing these steps for the federal bottom trawl survey. We can then compare how the FVCOM hindcast performs relative to the CTD data from the survey.### Load Trawl Data```{r}# Load the trawl datatrawl_path <-cs_path("res", "nmfs_trawl/SURVDAT_current")trawl_raw <-read_rds(str_c(trawl_path, "survdat_lw.rds"))# tidy it a littletrawl_dat <- gmRi::gmri_survdat_prep(survdat = trawl_raw$survdat, survdat_source ="most recent", box_location ="cloudstorage")# Get distinct time/date/tow/locationstrawl_locations <- trawl_dat %>%distinct(cruise6, station, stratum, tow, svvessel, est_towdate, season, decdeg_beglat, decdeg_beglon, bottemp_ctd = bottemp)```### Load FVCOM Inventory```{r}# Surface and Bottom only, from Dr. 
Lifvcom_path <-cs_path("res", "FVCOM/Lobster-ECOL")# Here are the files we have, loop over them laterfvcom_surfbot_files <-setNames(list.files(fvcom_path, full.names = T, pattern =".nc"),str_remove(list.files(fvcom_path, full.names = F, pattern =".nc"), ".nc"))# Test File: GOM3 1978# Load some daily FVCOM that we downloaded and averagedfvcom_yrx <-nc_open(fvcom_surfbot_files["gom3_1978"])# Get the mesh itself as a simple feature collectiongom3_mesh <-get_mesh_geometry(fvcom_yrx, what ='lonlat')```### Coverage OverlapThe following map shows the coverage overlap between trawl survey locations and FVCOM GOM3.```{r}# Map everythingggplot() +geom_sf(data = gom3_mesh, alpha =0.2, linewidth =0.1, color ="orange") +geom_sf(data =st_as_sf(trawl_locations, coords =c("decdeg_beglon", "decdeg_beglat"), crs =4326),shape =3, size =0.2, alpha =0.2) +geom_sf(data = new_england) +geom_sf(data = canada) +theme(legend.position ="right") +scale_fill_gmri() +coord_sf(xlim =c(-78, -65), ylim =c(35.5, 45)) +theme_bw() +map_theme() +labs(title ="Coverage overlap of FVCOM and Sample Points",fill ="Area")```# FVCOM Daily Product MatchingThere are a number of projects where biological samples are taken, but environmental conditions might not also be taken simultaneously. 
This adds the additional step of needing to index to the correct point in time and pull the correct node values associated with it.The following code approaches identifying the correct date indices to loop/map through this interpolation process.## Get the Proper Time Index for Each File```{r}#| label: surfbot date matching# Get the dates within each filefvcom_dates <-map_dfr( fvcom_surfbot_files,function(x){ x_fvcom <-nc_open(x) timez <-ncvar_get(x_fvcom, "Times")nc_close(x_fvcom)return(data.frame("fvcom_date"= timez) %>%mutate(time_idx =row_number())) }, .id ="fv_file")# Join them by date to look at their matchestrawl_dates_matched <- trawl_locations %>%mutate(tow_date =as.Date(est_towdate)) %>%left_join(mutate(fvcom_dates, fvcom_date =as.Date(fvcom_date)),join_by(tow_date == fvcom_date))```## Get Proper Element and Node Weights for Points```{r}#| label: triangulate points function# Function to add the linear interpolation weights based on node coordinatestriangulation_linear_weighting <-function(pts_sf, fvcom_mesh){# Identify the triangles that overlap each point:# Use st_join to assign elem, p1, p2, p3 IDs to the points pts_assigned <-st_join(st_transform(pts_sf, st_crs(fvcom_mesh)), gom3_mesh, join = st_within) %>%drop_na(elem)# Iterate over the rows to add weights: pts_weighted <- pts_assigned %>% base::split(., seq_len(nrow(.))) %>% purrr::map_dfr(function(pt_assigned){# Subset the relevant triangle from st_join info triangle_match <- fvcom_mesh[pt_assigned$elem,]# Build matrices for point to interpolate & of surrounding points:# Matrix for triangle# Use the triangles node coordinates from the sf geometries# Creates 3x3 matrix: row1 x coords, row 2, y coords, row three rep(1,3) node_vertices <-t(st_coordinates(triangle_match[1,])[1:3,1:3])# Make matrix from the points:# creates 3x1 matrix: x, y, 1 point_coords <-matrix(c(st_coordinates(pt_assigned[1,]), 1), nrow =3)#### For Linear Interpolation:# Get inverse of the matrix inverse_coordmat 
<-solve(node_vertices)# Solve for the weights node_wts <- inverse_coordmat %*% point_coords %>%t() %>%as.data.frame() %>%setNames(c("p1_wt", "p2_wt", "p3_wt"))# Return with dataframebind_cols(pt_assigned, node_wts) })# End Rowwisereturn(pts_weighted)}``````{r}#| label: get weights for each point# Make the trawl locations an sf dataframetrawl_pts_sf <- trawl_dates_matched %>%st_as_sf(coords =c("decdeg_beglon", "decdeg_beglat"), crs =4326, remove = F)# Run for all points:trawl_pts_weighted <-triangulation_linear_weighting(pts_sf = trawl_pts_sf, fvcom_mesh = gom3_mesh) %>%st_drop_geometry()```## Apply Interpolations at Proper Time Step## Datewise InterpolationThis step can be looped on each date/year to minimize the amount of fvcom file opening/closing. Within each year we need to identify which timestep to extract data at, and then iterate on them.```{r}#' Interpolate Values from FVCOM Mesh at Timestep#' #' @description Takes a dataframe row containing node and time indices and interpolation weights and returns that contains time index from which to interpolate.#' #' Should contain the following columns:#' time_idx = integer timestep to use for interpolation#' p1 = integer index value for node 1 surrounding the interpolation location#' p2 = integer index value for node 2 surrounding the interpolation location#' p3 = integer index value for node 3 surrounding the interpolation location#' p1_wt = linear interpolation weight for p1#' p2_wt = linear interpolation weight for p1#' p3_wt = linear interpolation weight for p1#'#' @param dated_points_weighted dataframe row containing time index, node index, and node weight information columns#' @param fvcom_nc FVCOM netcdf file to extract values from#' @param fvcom_varid = String indicating variable in FVCOM netcdf to interpolate values with#' @param var_out String to use as variable name in returned dataframe#'#' @return#' @export#'#' @examplesinterpolate_at_timestep <-function(dated_points_weighted, fvcom_nc, fvcom_varid, 
var_out){# Get the values of the variable of interest as vector node_vals <-ncvar_get(nc = fvcom_nc, varid = fvcom_varid, start =c(1, dated_points_weighted[["time_idx"]]),count =c(-1, 1))# Interpolate using the node numbers and weights dated_interpolation <- dated_points_weighted %>%mutate( {{var_out}} := node_vals[p1] * p1_wt + node_vals[p2] * p2_wt + node_vals[p3] * p3_wt)return(dated_interpolation)}```## Iterate on Years to Interpolate All PointsNow that we can pull/interpolate for a specific timestep, we can loop over the yearly files and obtain surface and bottom temperatures.```{r}# Operate over yearstrawl_fvcom_temps <- trawl_pts_weighted %>%#filter(year(est_towdate) == 2000) %>% drop_na(time_idx) %>%mutate(year =year(est_towdate)) %>%split(.$year) %>%map_dfr(.f =function(samples_year_x){# Get the file to open nc_name <- samples_year_x[["fv_file"]][[1]][[1]]# Open the corresponding Netcdf fvcom_yr_x <-nc_open(fvcom_surfbot_files[[nc_name]])# Split the samples by date locations_bydate <- samples_year_x %>% base::split(., seq_len(nrow(.))) # Iterate on those - do bottom temp and surface temp dates_interpolated <- locations_bydate %>%map(.f =~interpolate_at_timestep(dated_points_weighted = .x,fvcom_nc = fvcom_yr_x,fvcom_varid ="surface_t", var_out ="surface_temp")) %>%map_dfr(.f =~interpolate_at_timestep(dated_points_weighted = .x,fvcom_nc = fvcom_yr_x,fvcom_varid ="bottom_t", var_out ="bottom_temp"))return(dates_interpolated) })```## Check ResultsCompare against recorded bottom temperatures:```{r}trawl_fvcom_temps %>%ggplot(aes(bottemp_ctd, bottom_temp)) +geom_point(alpha =0.2) +geom_abline(slope =1, intercept =0, color ="royalblue", linewidth =1.5) +labs(x ="CTD Bottom Temperature",y ="FVCOM Bottom Temperature")```Show a map of where biases a season looks like:```{r}# Function to convert Fahrenheit to Celsiusfahrenheit_to_celsius <-function(f) { (f -32) *5/9}trawl_fvcom_temps %>%mutate(fvcom_bias = bottom_temp - bottemp_ctd) %>%drop_na(fvcom_bias) 
%>%st_as_sf(coords =c("decdeg_beglon", "decdeg_beglat"), crs =4326) %>%ggplot() +geom_sf(data = new_england) +geom_sf(data = canada) +geom_sf(aes(color = fvcom_bias, fill = fvcom_bias), alpha =0.5, size =0.5) +geom_abline(slope =1, intercept =0, color ="royalblue", linewidth =1.5) +scale_fill_distiller(palette ="RdBu", limits =c(-10, 10), labels =function(x) {# Add both Fahrenheit and Celsius labelspaste0(round(as_fahrenheit(x, data_type ="anomalies")), "°F / ", x, "°C") }) +scale_color_distiller(palette ="RdBu", limits =c(-10, 10), labels =function(x) {# Add both Fahrenheit and Celsius labelspaste0(round(as_fahrenheit(x, data_type ="anomalies")), "°F / ", x, "°C") }) +facet_wrap(~fct_rev(season)) +theme_dark() +coord_sf(xlim =c(-78, -65), ylim =c(34, 45)) +labs(x ="Lon",y ="Lat",title ="FVCOM Bottom Temperature Biases vs. NEFSC Trawl CTD",subtitle ="FVCOM Bias = FVCOM Bottom Temperature - CTD Bottom Temperature")```# Repeat for VTS SurveyWe also have point locations for the VTS survey.```{r}#| label: load VTS points# Path to resourcesvts_path <-cs_path("mills", "Projects/Lobster/VTS_fromASMFC")# Maineload(str_c(vts_path, "VTS_data.Rdata"))# Massload(str_c(vts_path, "VTS_MA_Proc_240201 all data for standardized index.Rdata"))# # Naming convention seems (mostly) the same# names(Trips) == names(Trips_MA)# names(Trawls) == names(Trawls_MA)# names(Traps) == names(Traps_MA)# names(Sites) == names(Sites_MA)# # Check what their contents are# glimpse(Trips) # TripId, Date*# glimpse(Trawls) # TrawlId, TripId, SiteId, Lon, Lat# glimpse(Sites) # SiteId, Lon, Lat# glimpse(Traps) # TrawlId, TrapId, # Need Trips (for date) and Trawls, and Sites I thinkvts_trips <-inner_join(bind_rows(Trips, mutate(Trips_MA, Fisher =as.character(Fisher))),bind_rows(Trawls, Trawls_MA), join_by(TripId)) %>%distinct(TripId, TrawlId, SiteId, Date, Longitude, Latitude) %>%st_as_sf(coords =c("Longitude", "Latitude"), crs =4326, remove = F) ```### VTS Survey Map```{r}vts_trips %>%ggplot() +geom_sf(alpha 
=0.3, size =0.5, shape =3)```### Extract VTS Temepratures```{r}# Take the VTS Survey Location and Time information:# Join them by their date to match them to FVCOM file names and their time indexvts_dates_matched <- vts_trips %>%mutate(Date =as.Date(Date)) %>%left_join(mutate(fvcom_dates, fvcom_date =as.Date(fvcom_date)),join_by(Date == fvcom_date))# Next, overlay the points to get the node ids# Also applies the weights for linear interpolation# Run for all points:vts_pts_weighted <-triangulation_linear_weighting(pts_sf = vts_dates_matched, fvcom_mesh = gom3_mesh) %>%st_drop_geometry()# Map over the yearly filesvts_fvcom_temps <- vts_pts_weighted %>%#filter(year(est_towdate) == 2000) %>% drop_na(time_idx) %>%mutate(year =year(Date)) %>%split(.$year) %>%map_dfr(.f =function(samples_year_x){# Get the file to open nc_name <- samples_year_x[["fv_file"]][[1]][[1]]# Open the corresponding Netcdf fvcom_yr_x <-nc_open(fvcom_surfbot_files[[nc_name]])# Split by row to iterate on each point locations_bydate <- samples_year_x %>% base::split(., seq_len(nrow(.))) # Iterate on those - do bottom temp and surface temp dates_interpolated <- locations_bydate %>%map(.f =~interpolate_at_timestep(dated_points_weighted = .x,fvcom_nc = fvcom_yr_x,fvcom_varid ="surface_t", var_out ="surface_temp")) %>%map_dfr(.f =~interpolate_at_timestep(dated_points_weighted = .x,fvcom_nc = fvcom_yr_x,fvcom_varid ="bottom_t", var_out ="bottom_temp"))return(dates_interpolated) })```### Map Interpolated Values```{r}# Make a plotvts_fvcom_temps %>%st_as_sf(coords =c("Longitude", "Latitude"), crs =4326, remove = F) %>%ggplot() +geom_sf(aes(color = bottom_temp), size =0.5, alpha =0.4) +geom_sf(data = new_england) +geom_sf(data = canada) +scale_color_distiller(palette ="RdBu") +coord_sf(xlim =c(-71, -67), ylim =c(41, 45))```### Plot Both Surveys Together```{r}# Make a plotvts_fvcom_temps %>%st_as_sf(coords =c("Longitude", "Latitude"), crs =4326, remove = F) %>%ggplot() +geom_sf(aes(color = bottom_temp, shape 
="VTS Survey"), size =0.5, alpha =0.8) +geom_sf(data = trawl_fvcom_temps %>%st_as_sf(coords =c("decdeg_beglon", "decdeg_beglat"), crs =4326),aes(color = bottom_temp, shape ="NEFSC Trawl Survey"),size =0.6, alpha =0.8) +geom_sf(data = new_england) +geom_sf(data = canada) +scale_color_distiller(palette ="RdBu") +# coord_sf(xlim = c(-71, -67), ylim = c(41, 45)) +coord_sf(xlim =c(-78, -65), ylim =c(34, 45)) +map_theme() +labs(x ="",y ="",title ="NEFSC + VTS Survey Interpolations" )```## Export```{r}# # Save VTS Bottom temperatures# write_csv(trawl_fvcom_temps, here::here("data/point_location_temperatures", "NEFSCtrawl_location_fvcom_temps.csv"))# write_csv(vts_fvcom_temps, here::here("data/point_location_temperatures", "VTS_location_fvcom_temps.csv"))```